In [1]:
import tensorflow as tf
import numpy as np
In [2]:
# Explicitly create a Graph object
graph = tf.Graph()
with graph.as_default():
with tf.name_scope("variables"):
# Variable to keep track of how many times the graph has been run
global_step = tf.Variable(0, dtype=tf.int32, name="global_step")
# Variable that keeps track of the sum of all output values over time:
total_output = tf.Variable(0.0, dtype=tf.float32, name="total_output")
# Primary transformation Operations
with tf.name_scope("transformation"):
# Separate input layer
with tf.name_scope("input"):
# Create input placeholder- takes in a Vector
a = tf.placeholder(tf.float32, shape=[None], name="input_placeholder_a")
# Separate middle layer
with tf.name_scope("intermediate_layer"):
b = tf.reduce_prod(a, name="product_b")
c = tf.reduce_sum(a, name="sum_c")
# Separate output layer
with tf.name_scope("output"):
output = tf.add(b, c, name="output")
with tf.name_scope("update"):
# Increments the total_output Variable by the latest output
update_total = total_output.assign_add(output)
# Increments the above `global_step` Variable, should be run whenever the graph is run
increment_step = global_step.assign_add(1)
# Summary Operations
with tf.name_scope("summaries"):
avg = tf.div(update_total, tf.cast(increment_step, tf.float32), name="average")
# Creates summaries for output node
tf.scalar_summary(b'Output', output, name="output_summary")
tf.scalar_summary(b'Sum of outputs over time', update_total, name="total_summary")
tf.scalar_summary(b'Average of outputs over time', avg, name="average_summary")
# Global Variables and Operations
with tf.name_scope("global_ops"):
# Initialization Op
init = tf.initialize_all_variables()
# Merge all summaries into one Operation
merged_summaries = tf.merge_all_summaries()
# Start a Session, using the explicitly created Graph
sess = tf.Session(graph=graph)
# Open a SummaryWriter to save summaries
writer = tf.train.SummaryWriter('./improved_graph', graph)
# Initialize Variables
sess.run(init)
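Note: the cell above uses the pre-1.0 summary API. If you are on a TensorFlow 1.x release where tf.scalar_summary, tf.merge_all_summaries, tf.initialize_all_variables, and tf.train.SummaryWriter have been removed, the renamed equivalents below are a minimal drop-in sketch; it assumes the same graph, output, update_total, and avg defined above, and uses underscore tag names to keep them valid op names:
# Hedged sketch for TensorFlow 1.x: the same summaries, init, session, and writer
# rebuilt with the renamed API.
with graph.as_default():
    with tf.name_scope("summaries"):
        tf.summary.scalar('output', output)
        tf.summary.scalar('sum_of_outputs_over_time', update_total)
        tf.summary.scalar('average_of_outputs_over_time', avg)
    with tf.name_scope("global_ops"):
        init = tf.global_variables_initializer()
        merged_summaries = tf.summary.merge_all()
sess = tf.Session(graph=graph)
writer = tf.summary.FileWriter('./improved_graph', graph)
sess.run(init)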
In [3]:
def run_graph(input_tensor):
"""
Helper function; runs the graph with given input tensor and saves summaries
"""
feed_dict = {a: input_tensor}
out, step, summary = sess.run([output, increment_step, merged_summaries], feed_dict=feed_dict)
writer.add_summary(summary, global_step=step)
In [4]:
# Run the graph with various inputs
run_graph([2,8])
run_graph([3,1,3,3])
run_graph([8])
run_graph([1,2,3])
run_graph([11,4])
run_graph([4,1])
run_graph([7,3,1])
run_graph([6,3])
run_graph([0,2])
run_graph([4,5,6])
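As a quick sanity check on what the transformation computes, the first call above feeds [2, 8]: reduce_prod gives 16, reduce_sum gives 10, so the output node should evaluate to 26. The same arithmetic in plain NumPy (illustrative only, not part of the graph):
example = np.array([2, 8], dtype=np.float32)
expected = np.prod(example) + np.sum(example)  # 16.0 + 10.0 = 26.0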
In [5]:
# Write the summaries to disk
writer.flush()
In [6]:
# Close the SummaryWriter
writer.close()
In [7]:
# Close the session
sess.close()
To start TensorBoard after running this code, run the following command:
$ tensorboard --logdir='./improved_graph'
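By default, TensorBoard serves on port 6006; point a browser at http://localhost:6006 to view the scalar summaries and the name-scoped graph written above.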
In [ ]: